This notebook trains a TensorFlow neural network to classify touch gestures on the LDR trackpad.
import numpy as np
from tensorflow.keras.utils import to_categorical

# Load the gestures, one CSV file per gesture class; each row of a file is one
# recorded sample. Class index = position in this list (matches the filename prefix).
DATA_DIR = 'data/200Hz x 500ms v3'
GESTURE_FILES = [
    ('swipe right', '0-swipe-right.csv'),
    ('swipe left', '1-swipe-left.csv'),
    ('swipe up', '2-swipe-up.csv'),
    ('swipe down', '3-swipe-down.csv'),
    ('tap right', '4-tap-right.csv'),
    ('tap left', '5-tap-left.csv'),
]
gestures = [
    (name, np.loadtxt(f'{DATA_DIR}/{filename}', delimiter=','))
    for name, filename in GESTURE_FILES
]
# X: all samples stacked row-wise; y: one-hot labels derived from file order
X = np.vstack([data for name, data in gestures])
y = to_categorical(np.concatenate([i * np.ones(len(data)) for i, (name, data) in enumerate(gestures)]))
print('X.shape', X.shape)
print('y.shape', y.shape)
X.shape (200, 2400) y.shape (200, 6)
# we're going to use a CNN (Convolutional Neural Network), so we'll reshape the X
# so that each sample is a WINDOW_IN_SAMPLES x NUM_SENSORS matrix
# (plus a trailing channel axis of size 1, as Conv2D expects)
NUM_SENSORS = 24
WINDOW_IN_SAMPLES = 100
# sanity check: each flattened row must factor exactly into the window shape,
# otherwise reshape would silently mis-align samples (or raise a cryptic error)
assert X.shape[1] == WINDOW_IN_SAMPLES * NUM_SENSORS, (
    f'expected {WINDOW_IN_SAMPLES * NUM_SENSORS} features per sample, got {X.shape[1]}')
X = X.reshape((-1, WINDOW_IN_SAMPLES, NUM_SENSORS, 1))
print('X.shape', X.shape)
X.shape (200, 100, 24, 1)
# split into train/test, then carve a validation set out of the training set.
# stratify keeps the class proportions equal across splits, which matters with
# only ~200 samples spread over 6 classes (an unlucky random split could
# under-represent a gesture in the test or validation set)
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=0, stratify=y)
X_train, X_valid, y_train, y_valid = train_test_split(
    X_train, y_train, test_size=0.2, random_state=0, stratify=y_train)
# create NN: one Conv2D layer whose kernel spans all NUM_SENSORS columns at once
# (so each filter sees every sensor per time step), then a small dense head
import tensorflow as tf
from tensorflow.keras import Sequential, layers

nn = Sequential([
    layers.Conv2D(16, kernel_size=(5, NUM_SENSORS), strides=(2, 1),
                  input_shape=X.shape[1:]),
    layers.Flatten(),
    layers.Dropout(0.3),
    layers.Dense(20, activation='relu'),
    layers.Dense(len(gestures), activation='softmax'),
])
nn.compile(loss='categorical_crossentropy', metrics=['accuracy'], optimizer='adam')
history = nn.fit(X_train, y_train, epochs=30, batch_size=8,
                 validation_data=(X_valid, y_valid))
Epoch 1/30 15/15 [==============================] - 0s 8ms/step - loss: 1.5551 - accuracy: 0.4083 - val_loss: 1.2653 - val_accuracy: 0.5667 Epoch 2/30 15/15 [==============================] - 0s 4ms/step - loss: 1.0114 - accuracy: 0.6833 - val_loss: 0.6609 - val_accuracy: 0.7667 Epoch 3/30 15/15 [==============================] - 0s 3ms/step - loss: 0.5780 - accuracy: 0.8333 - val_loss: 0.4179 - val_accuracy: 0.8667 Epoch 4/30 15/15 [==============================] - 0s 3ms/step - loss: 0.3708 - accuracy: 0.9000 - val_loss: 0.3099 - val_accuracy: 0.9000 Epoch 5/30 15/15 [==============================] - 0s 3ms/step - loss: 0.2363 - accuracy: 0.9250 - val_loss: 0.2263 - val_accuracy: 0.9333 Epoch 6/30 15/15 [==============================] - 0s 3ms/step - loss: 0.2167 - accuracy: 0.9167 - val_loss: 0.2756 - val_accuracy: 0.9000 Epoch 7/30 15/15 [==============================] - 0s 3ms/step - loss: 0.1882 - accuracy: 0.9333 - val_loss: 0.2780 - val_accuracy: 0.9000 Epoch 8/30 15/15 [==============================] - 0s 3ms/step - loss: 0.1538 - accuracy: 0.9333 - val_loss: 0.1822 - val_accuracy: 0.9333 Epoch 9/30 15/15 [==============================] - 0s 3ms/step - loss: 0.1466 - accuracy: 0.9500 - val_loss: 0.1553 - val_accuracy: 0.9333 Epoch 10/30 15/15 [==============================] - 0s 3ms/step - loss: 0.1342 - accuracy: 0.9250 - val_loss: 0.1302 - val_accuracy: 0.9333 Epoch 11/30 15/15 [==============================] - 0s 3ms/step - loss: 0.1022 - accuracy: 0.9583 - val_loss: 0.1410 - val_accuracy: 0.9000 Epoch 12/30 15/15 [==============================] - 0s 3ms/step - loss: 0.0751 - accuracy: 0.9833 - val_loss: 0.1282 - val_accuracy: 0.9333 Epoch 13/30 15/15 [==============================] - 0s 3ms/step - loss: 0.0600 - accuracy: 0.9833 - val_loss: 0.1301 - val_accuracy: 0.9667 Epoch 14/30 15/15 [==============================] - 0s 3ms/step - loss: 0.0645 - accuracy: 0.9917 - val_loss: 0.1181 - val_accuracy: 0.9667 Epoch 15/30 15/15 
[==============================] - 0s 3ms/step - loss: 0.0806 - accuracy: 0.9750 - val_loss: 0.0984 - val_accuracy: 0.9667 Epoch 16/30 15/15 [==============================] - 0s 3ms/step - loss: 0.0447 - accuracy: 0.9833 - val_loss: 0.1059 - val_accuracy: 0.9667 Epoch 17/30 15/15 [==============================] - 0s 3ms/step - loss: 0.0387 - accuracy: 0.9833 - val_loss: 0.1016 - val_accuracy: 0.9667 Epoch 18/30 15/15 [==============================] - 0s 3ms/step - loss: 0.0408 - accuracy: 0.9833 - val_loss: 0.1111 - val_accuracy: 0.9667 Epoch 19/30 15/15 [==============================] - 0s 3ms/step - loss: 0.0316 - accuracy: 1.0000 - val_loss: 0.1223 - val_accuracy: 0.9667 Epoch 20/30 15/15 [==============================] - 0s 3ms/step - loss: 0.0254 - accuracy: 1.0000 - val_loss: 0.1163 - val_accuracy: 0.9667 Epoch 21/30 15/15 [==============================] - 0s 3ms/step - loss: 0.0176 - accuracy: 1.0000 - val_loss: 0.1303 - val_accuracy: 0.9667 Epoch 22/30 15/15 [==============================] - 0s 3ms/step - loss: 0.0172 - accuracy: 1.0000 - val_loss: 0.1120 - val_accuracy: 0.9667 Epoch 23/30 15/15 [==============================] - 0s 3ms/step - loss: 0.0204 - accuracy: 1.0000 - val_loss: 0.1154 - val_accuracy: 0.9667 Epoch 24/30 15/15 [==============================] - 0s 3ms/step - loss: 0.0156 - accuracy: 1.0000 - val_loss: 0.1371 - val_accuracy: 0.9667 Epoch 25/30 15/15 [==============================] - 0s 3ms/step - loss: 0.0119 - accuracy: 1.0000 - val_loss: 0.1200 - val_accuracy: 0.9667 Epoch 26/30 15/15 [==============================] - 0s 3ms/step - loss: 0.0170 - accuracy: 1.0000 - val_loss: 0.1383 - val_accuracy: 0.9667 Epoch 27/30 15/15 [==============================] - 0s 4ms/step - loss: 0.0174 - accuracy: 0.9917 - val_loss: 0.1406 - val_accuracy: 0.9667 Epoch 28/30 15/15 [==============================] - 0s 3ms/step - loss: 0.0102 - accuracy: 1.0000 - val_loss: 0.1490 - val_accuracy: 0.9667 Epoch 29/30 15/15 
[==============================] - 0s 3ms/step - loss: 0.0105 - accuracy: 1.0000 - val_loss: 0.1592 - val_accuracy: 0.9667 Epoch 30/30 15/15 [==============================] - 0s 3ms/step - loss: 0.0069 - accuracy: 1.0000 - val_loss: 0.1600 - val_accuracy: 0.9667
# plot train vs. validation loss per epoch
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.set_title('Loss')
ax.plot(history.history['loss'], label='train')
ax.plot(history.history['val_loss'], label='validation')
ax.legend()
plt.show()
# compute per-class classification performance on the held-out test set
from sklearn.metrics import classification_report
class_names = [name for name, data in gestures]
y_true = y_test.argmax(axis=1)              # one-hot rows -> class indices
y_pred = nn.predict(X_test).argmax(axis=1)  # softmax probs -> predicted class
print(classification_report(y_true, y_pred, target_names=class_names))
precision recall f1-score support swipe right 1.00 0.91 0.95 11 swipe left 1.00 0.89 0.94 9 swipe up 0.78 0.88 0.82 8 swipe down 0.79 0.85 0.81 13 tap right 1.00 1.00 1.00 4 tap left 1.00 1.00 1.00 5 accuracy 0.90 50 macro avg 0.93 0.92 0.92 50 weighted avg 0.91 0.90 0.90 50
# port to C++: emit the trained model as a C byte array (see output below)
from tinymlgen import port
c_source = port(nn, optimize=False)
print(c_source)
#ifdef __has_attribute #define HAVE_ATTRIBUTE(x) __has_attribute(x) #else #define HAVE_ATTRIBUTE(x) 0 #endif #if HAVE_ATTRIBUTE(aligned) || (defined(__GNUC__) && !defined(__clang__)) #define DATA_ALIGN_ATTRIBUTE __attribute__((aligned(4))) #else #define DATA_ALIGN_ATTRIBUTE #endif const unsigned char model_data[] DATA_ALIGN_ATTRIBUTE = {...}; const int model_data_len = 71836;